func runtime.add
262 uses
runtime (current package)
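For context, add is the runtime's unchecked pointer-arithmetic helper: it offsets an unsafe.Pointer by x bytes with no bounds or type checking, which is why it shows up in nearly every low-level subsystem listed below (maps, channels, hashing, cgo checks, the GC). Its signature appears in the stubs.go entry in this listing; the body is a one-liner, sketched here as it appears in recent Go sources (the comment wording is paraphrased):

// add returns p+x as a raw byte offset.
// The unsafe.Pointer -> uintptr -> unsafe.Pointer conversion stays
// within a single expression, the pattern the unsafe package
// documents as valid pointer arithmetic.
//
//go:nosplit
func add(p unsafe.Pointer, x uintptr) unsafe.Pointer {
	return unsafe.Pointer(uintptr(p) + x)
}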
alg.go#L177: h = typehash(a.Elem, add(p, i*a.Elem.Size_), h)
alg.go#L186: h = typehash(f.Typ, add(p, f.Offset), h)
arena.go#L628: userArenaHeapBitsSetType(typ, add(ptr, uintptr(i)*typ.Size_), base)
cgocall.go#L481: p = add(p, at.Elem.Size_)
cgocall.go#L507: p = *(*unsafe.Pointer)(add(p, goarch.PtrSize))
cgocall.go#L530: p = add(p, st.Elem.Size_)
cgocall.go#L553: cgoCheckArg(f.Typ, add(p, f.Offset), true, top, msg)
cgocheck.go#L125: p = add(p, typ.Size_)
cgocheck.go#L153: cgoCheckBits(add(src, -doff), datap.gcdatamask.bytedata, off+doff, size)
cgocheck.go#L158: cgoCheckBits(add(src, -boff), datap.gcbssmask.bytedata, off+boff, size)
cgocheck.go#L203: src = add(src, skipBytes)
cgocheck.go#L218: v := *(*unsafe.Pointer)(add(src, i))
cgocheck.go#L261: src = add(src, at.Elem.Size_)
cgocheck.go#L279: src = add(src, f.Typ.Size_)
chan.go#L103: c.buf = add(unsafe.Pointer(c), hchanSize)
chan.go#L123: return add(c.buf, uintptr(i)*uintptr(c.elemsize))
checkptr.go#L42: end := add(ptr, size-1)
hash64.go#L33: a |= uintptr(*(*byte)(add(p, s>>1))) << 8
hash64.go#L34: a |= uintptr(*(*byte)(add(p, s-1))) << 16
hash64.go#L40: b = r4(add(p, s-4))
hash64.go#L46: b = r8(add(p, s-8))
hash64.go#L53: seed = mix(r8(p)^m2, r8(add(p, 8))^seed)
hash64.go#L54: seed1 = mix(r8(add(p, 16))^m3, r8(add(p, 24))^seed1)
hash64.go#L55: seed2 = mix(r8(add(p, 32))^m4, r8(add(p, 40))^seed2)
hash64.go#L56: p = add(p, 48)
hash64.go#L61: seed = mix(r8(p)^m2, r8(add(p, 8))^seed)
hash64.go#L62: p = add(p, 16)
hash64.go#L64: a = r8(add(p, l-16))
hash64.go#L65: b = r8(add(p, l-8))
iface.go#L104: p := (**itab)(add(unsafe.Pointer(&t.entries), h*goarch.PtrSize))
iface.go#L165: p := (**itab)(add(unsafe.Pointer(&t.entries), h*goarch.PtrSize))
iface.go#L203: xmhdr := (*[1 << 16]abi.Method)(add(unsafe.Pointer(x), uintptr(x.Moff)))[:nt:nt]
iface.go#L357: x = add(x, 6)
iface.go#L370: x = add(x, 4)
iface.go#L486: m := *(**itab)(add(unsafe.Pointer(&t.entries), i*goarch.PtrSize))
malloc.go#L1263: x = add(x, size-dataSize)
map.go#L210: return *(**bmap)(add(unsafe.Pointer(b), uintptr(t.BucketSize)-goarch.PtrSize))
map.go#L214: *(**bmap)(add(unsafe.Pointer(b), uintptr(t.BucketSize)-goarch.PtrSize)) = ovf
map.go#L218: return add(unsafe.Pointer(b), dataOffset)
map.go#L255: h.extra.nextOverflow = (*bmap)(add(unsafe.Pointer(ovf), uintptr(t.BucketSize)))
map.go#L384: nextOverflow = (*bmap)(add(buckets, base*uintptr(t.BucketSize)))
map.go#L385: last := (*bmap)(add(buckets, (nbuckets-1)*uintptr(t.BucketSize)))
map.go#L420: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map.go#L426: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map.go#L441: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L446: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L481: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map.go#L487: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map.go#L502: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L507: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L525: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map.go#L531: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map.go#L546: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L551: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L613: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map.go#L625: insertk = add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L626: elem = add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L633: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L644: elem = add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L667: insertk = add(unsafe.Pointer(newb), dataOffset)
map.go#L668: elem = add(insertk, bucketCnt*uintptr(t.KeySize))
map.go#L729: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map.go#L741: k := add(unsafe.Pointer(b), dataOffset+i*uintptr(t.KeySize))
map.go#L755: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L896: b = (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.BucketSize)))
map.go#L900: b = (*bmap)(add(it.buckets, bucket*uintptr(t.BucketSize)))
map.go#L904: b = (*bmap)(add(it.buckets, bucket*uintptr(t.BucketSize)))
map.go#L921: k := add(unsafe.Pointer(b), dataOffset+uintptr(offi)*uintptr(t.KeySize))
map.go#L925: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.KeySize)+uintptr(offi)*uintptr(t.ValueSize))
map.go#L1014: b := (*bmap)(add(bucket, i*uintptr(t.BucketSize)))
map.go#L1157: b := (*bmap)(add(h.oldbuckets, bucket*uintptr(t.BucketSize)))
map.go#L1170: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.BucketSize)))
map.go#L1179: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.BucketSize)))
map.go#L1180: x.k = add(unsafe.Pointer(x.b), dataOffset)
map.go#L1181: x.e = add(x.k, bucketCnt*uintptr(t.KeySize))
map.go#L1187: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.BucketSize)))
map.go#L1188: y.k = add(unsafe.Pointer(y.b), dataOffset)
map.go#L1189: y.e = add(y.k, bucketCnt*uintptr(t.KeySize))
map.go#L1193: k := add(unsafe.Pointer(b), dataOffset)
map.go#L1194: e := add(k, bucketCnt*uintptr(t.KeySize))
map.go#L1195: for i := 0; i < bucketCnt; i, k, e = i+1, add(k, uintptr(t.KeySize)), add(e, uintptr(t.ValueSize)) {
map.go#L1244: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map.go#L1245: dst.e = add(dst.k, bucketCnt*uintptr(t.KeySize))
map.go#L1263: dst.k = add(dst.k, uintptr(t.KeySize))
map.go#L1264: dst.e = add(dst.e, uintptr(t.ValueSize))
map.go#L1269: b := add(h.oldbuckets, oldbucket*uintptr(t.BucketSize))
map.go#L1272: ptr := add(b, dataOffset)
map.go#L1477: srcK := add(unsafe.Pointer(src), dataOffset+uintptr(i)*uintptr(t.KeySize))
map.go#L1478: srcEle := add(unsafe.Pointer(src), dataOffset+bucketCnt*uintptr(t.KeySize)+uintptr(i)*uintptr(t.ValueSize))
map.go#L1479: dstK := add(unsafe.Pointer(dst), dataOffset+uintptr(pos)*uintptr(t.KeySize))
map.go#L1480: dstEle := add(unsafe.Pointer(dst), dataOffset+bucketCnt*uintptr(t.KeySize)+uintptr(pos)*uintptr(t.ValueSize))
map.go#L1527: dstBmap := (*bmap)(add(dst.buckets, uintptr(i*int(t.BucketSize))))
map.go#L1530: srcBmap := (*bmap)(add(src.buckets, uintptr((i+j)*int(t.BucketSize))))
map.go#L1550: srcBmap := (*bmap)(add(srcOldbuckets, uintptr(i*int(t.BucketSize))))
map.go#L1556: dstBmap := (*bmap)(add(dst.buckets, (uintptr(i)&bucketMask(dst.B))*uintptr(t.BucketSize)))
map.go#L1579: srcK := add(unsafe.Pointer(srcBmap), dataOffset+i*uintptr(t.KeySize))
map.go#L1584: srcEle := add(unsafe.Pointer(srcBmap), dataOffset+bucketCnt*uintptr(t.KeySize)+i*uintptr(t.ValueSize))
map.go#L1619: b := (*bmap)(add(buckets, uintptr(bucket)*uintptr(t.BucketSize)))
map.go#L1627: b := (*bmap)(add(h.oldbuckets, uintptr(bucket)*uintptr(t.BucketSize)))
map.go#L1647: k := add(unsafe.Pointer(b), dataOffset+offi*uintptr(t.KeySize))
map.go#L1654: typedmemmove(t.Key, add(s.array, uintptr(s.len)*uintptr(t.KeySize)), k)
map.go#L1682: b := (*bmap)(add(buckets, uintptr(bucket)*uintptr(t.BucketSize)))
map.go#L1690: b := (*bmap)(add(h.oldbuckets, uintptr(bucket)*uintptr(t.BucketSize)))
map.go#L1712: ele := add(unsafe.Pointer(b), dataOffset+bucketCnt*uintptr(t.KeySize)+offi*uintptr(t.ValueSize))
map.go#L1719: typedmemmove(t.Elem, add(s.array, uintptr(s.len)*uintptr(t.ValueSize)), ele)
map_fast32.go#L31: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_fast32.go#L37: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_fast32.go#L44: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 4) {
map_fast32.go#L46: return add(unsafe.Pointer(b), dataOffset+bucketCnt*4+i*uintptr(t.ValueSize))
map_fast32.go#L71: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_fast32.go#L77: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_fast32.go#L84: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 4) {
map_fast32.go#L86: return add(unsafe.Pointer(b), dataOffset+bucketCnt*4+i*uintptr(t.ValueSize)), true
map_fast32.go#L118: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast32.go#L137: k := *((*uint32)(add(unsafe.Pointer(b), dataOffset+i*4)))
map_fast32.go#L168: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*4)
map_fast32.go#L175: elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*4+inserti*uintptr(t.ValueSize))
map_fast32.go#L208: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast32.go#L227: k := *((*unsafe.Pointer)(add(unsafe.Pointer(b), dataOffset+i*4)))
map_fast32.go#L258: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*4)
map_fast32.go#L265: elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*4+inserti*uintptr(t.ValueSize))
map_fast32.go#L294: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast32.go#L298: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 4) {
map_fast32.go#L310: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*4+i*uintptr(t.ValueSize))
map_fast32.go#L375: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.BucketSize)))
map_fast32.go#L384: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.BucketSize)))
map_fast32.go#L385: x.k = add(unsafe.Pointer(x.b), dataOffset)
map_fast32.go#L386: x.e = add(x.k, bucketCnt*4)
map_fast32.go#L392: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.BucketSize)))
map_fast32.go#L393: y.k = add(unsafe.Pointer(y.b), dataOffset)
map_fast32.go#L394: y.e = add(y.k, bucketCnt*4)
map_fast32.go#L398: k := add(unsafe.Pointer(b), dataOffset)
map_fast32.go#L399: e := add(k, bucketCnt*4)
map_fast32.go#L400: for i := 0; i < bucketCnt; i, k, e = i+1, add(k, 4), add(e, uintptr(t.ValueSize)) {
map_fast32.go#L425: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map_fast32.go#L426: dst.e = add(dst.k, bucketCnt*4)
map_fast32.go#L444: dst.k = add(dst.k, 4)
map_fast32.go#L445: dst.e = add(dst.e, uintptr(t.ValueSize))
map_fast32.go#L450: b := add(h.oldbuckets, oldbucket*uintptr(t.BucketSize))
map_fast32.go#L453: ptr := add(b, dataOffset)
map_fast64.go#L31: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_fast64.go#L37: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_fast64.go#L44: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 8) {
map_fast64.go#L46: return add(unsafe.Pointer(b), dataOffset+bucketCnt*8+i*uintptr(t.ValueSize))
map_fast64.go#L71: b = (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_fast64.go#L77: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_fast64.go#L84: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 8) {
map_fast64.go#L86: return add(unsafe.Pointer(b), dataOffset+bucketCnt*8+i*uintptr(t.ValueSize)), true
map_fast64.go#L118: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast64.go#L137: k := *((*uint64)(add(unsafe.Pointer(b), dataOffset+i*8)))
map_fast64.go#L168: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*8)
map_fast64.go#L175: elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*8+inserti*uintptr(t.ValueSize))
map_fast64.go#L208: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast64.go#L227: k := *((*unsafe.Pointer)(add(unsafe.Pointer(b), dataOffset+i*8)))
map_fast64.go#L258: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*8)
map_fast64.go#L265: elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*8+inserti*uintptr(t.ValueSize))
map_fast64.go#L294: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_fast64.go#L298: for i, k := uintptr(0), b.keys(); i < bucketCnt; i, k = i+1, add(k, 8) {
map_fast64.go#L312: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*8+i*uintptr(t.ValueSize))
map_fast64.go#L377: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.BucketSize)))
map_fast64.go#L386: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.BucketSize)))
map_fast64.go#L387: x.k = add(unsafe.Pointer(x.b), dataOffset)
map_fast64.go#L388: x.e = add(x.k, bucketCnt*8)
map_fast64.go#L394: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.BucketSize)))
map_fast64.go#L395: y.k = add(unsafe.Pointer(y.b), dataOffset)
map_fast64.go#L396: y.e = add(y.k, bucketCnt*8)
map_fast64.go#L400: k := add(unsafe.Pointer(b), dataOffset)
map_fast64.go#L401: e := add(k, bucketCnt*8)
map_fast64.go#L402: for i := 0; i < bucketCnt; i, k, e = i+1, add(k, 8), add(e, uintptr(t.ValueSize)) {
map_fast64.go#L427: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map_fast64.go#L428: dst.e = add(dst.k, bucketCnt*8)
map_fast64.go#L452: dst.k = add(dst.k, 8)
map_fast64.go#L453: dst.e = add(dst.e, uintptr(t.ValueSize))
map_fast64.go#L458: b := add(h.oldbuckets, oldbucket*uintptr(t.BucketSize))
map_fast64.go#L461: ptr := add(b, dataOffset)
map_faststr.go#L30: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L39: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.ValueSize))
map_faststr.go#L46: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L55: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.ValueSize))
map_faststr.go#L62: if *((*[4]byte)(add(key.str, uintptr(key.len)-4))) != *((*[4]byte)(add(k.str, uintptr(key.len)-4))) {
map_faststr.go#L72: k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+keymaybe*2*goarch.PtrSize))
map_faststr.go#L74: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+keymaybe*uintptr(t.ValueSize))
map_faststr.go#L82: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_faststr.go#L88: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_faststr.go#L95: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L101: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.ValueSize))
map_faststr.go#L125: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L134: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.ValueSize)), true
map_faststr.go#L141: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L150: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.ValueSize)), true
map_faststr.go#L157: if *((*[4]byte)(add(key.str, uintptr(key.len)-4))) != *((*[4]byte)(add(k.str, uintptr(key.len)-4))) {
map_faststr.go#L167: k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+keymaybe*2*goarch.PtrSize))
map_faststr.go#L169: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+keymaybe*uintptr(t.ValueSize)), true
map_faststr.go#L177: b := (*bmap)(add(h.buckets, (hash&m)*uintptr(t.BucketSize)))
map_faststr.go#L183: oldb := (*bmap)(add(c, (hash&m)*uintptr(t.BucketSize)))
map_faststr.go#L190: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L196: return add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.ValueSize)), true
map_faststr.go#L229: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_faststr.go#L249: k := (*stringStruct)(add(unsafe.Pointer(b), dataOffset+i*2*goarch.PtrSize))
map_faststr.go#L287: insertk = add(unsafe.Pointer(insertb), dataOffset+inserti*2*goarch.PtrSize)
map_faststr.go#L293: elem := add(unsafe.Pointer(insertb), dataOffset+bucketCnt*2*goarch.PtrSize+inserti*uintptr(t.ValueSize))
map_faststr.go#L323: b := (*bmap)(add(h.buckets, bucket*uintptr(t.BucketSize)))
map_faststr.go#L328: for i, kptr := uintptr(0), b.keys(); i < bucketCnt; i, kptr = i+1, add(kptr, 2*goarch.PtrSize) {
map_faststr.go#L338: e := add(unsafe.Pointer(b), dataOffset+bucketCnt*2*goarch.PtrSize+i*uintptr(t.ValueSize))
map_faststr.go#L403: b := (*bmap)(add(h.oldbuckets, oldbucket*uintptr(t.BucketSize)))
map_faststr.go#L412: x.b = (*bmap)(add(h.buckets, oldbucket*uintptr(t.BucketSize)))
map_faststr.go#L413: x.k = add(unsafe.Pointer(x.b), dataOffset)
map_faststr.go#L414: x.e = add(x.k, bucketCnt*2*goarch.PtrSize)
map_faststr.go#L420: y.b = (*bmap)(add(h.buckets, (oldbucket+newbit)*uintptr(t.BucketSize)))
map_faststr.go#L421: y.k = add(unsafe.Pointer(y.b), dataOffset)
map_faststr.go#L422: y.e = add(y.k, bucketCnt*2*goarch.PtrSize)
map_faststr.go#L426: k := add(unsafe.Pointer(b), dataOffset)
map_faststr.go#L427: e := add(k, bucketCnt*2*goarch.PtrSize)
map_faststr.go#L428: for i := 0; i < bucketCnt; i, k, e = i+1, add(k, 2*goarch.PtrSize), add(e, uintptr(t.ValueSize)) {
map_faststr.go#L453: dst.k = add(unsafe.Pointer(dst.b), dataOffset)
map_faststr.go#L454: dst.e = add(dst.k, bucketCnt*2*goarch.PtrSize)
map_faststr.go#L467: dst.k = add(dst.k, 2*goarch.PtrSize)
map_faststr.go#L468: dst.e = add(dst.e, uintptr(t.ValueSize))
map_faststr.go#L473: b := add(h.oldbuckets, oldbucket*uintptr(t.BucketSize))
map_faststr.go#L476: ptr := add(b, dataOffset)
mgcmark.go#L278: ptrmask := (*uint8)(add(unsafe.Pointer(ptrmask0), uintptr(shard)*(rootBlockBytes/(8*goarch.PtrSize))))
mgcsweep.go#L937: *(*uint32)(add(x, i)) = 0xdeadbeef
mpagealloc_64bit.go#L122: offAddr{uintptr(add(base, baseOffset))},
mpagealloc_64bit.go#L123: offAddr{uintptr(add(base, limitOffset))},
mprof.go#L233: stk := (*[maxStack]uintptr)(add(unsafe.Pointer(b), unsafe.Sizeof(*b)))
mprof.go#L242: data := add(unsafe.Pointer(b), unsafe.Sizeof(*b)+b.nstk*unsafe.Sizeof(uintptr(0)))
mprof.go#L251: data := add(unsafe.Pointer(b), unsafe.Sizeof(*b)+b.nstk*unsafe.Sizeof(uintptr(0)))
mspanset.go#L299: return (*atomic.Pointer[spanSetBlock])(add(unsafe.Pointer(s.p), goarch.PtrSize*idx))
netpoll.go#L667: pd := (*pollDesc)(add(mem, i*pdSize))
os_linux.go#L244: auxvp := (*[1 << 28]uintptr)(add(unsafe.Pointer(argv), uintptr(n)*goarch.PtrSize))
panic.go#L733: fd = add(fd, unsafe.Sizeof(b))
proc.go#L599: return *(**g)(add(unsafe.Pointer(ptr), i*goarch.PtrSize))
proc.go#L6736: firstFunc := add(unsafe.Pointer(t), 8)
proc.go#L6738: p := add(firstFunc, uintptr(i)*goarch.PtrSize)
runtime1.go#L63: return *(**byte)(add(unsafe.Pointer(argv), uintptr(i)*goarch.PtrSize))
signal_linux_amd64.go#L55: *(*uintptr)(add(unsafe.Pointer(c.info), 2*goarch.PtrSize)) = uintptr(x)
slice.go#L59: memclrNoHeapPointers(add(to, copymem), tomem-copymem)
slice.go#L271: memclrNoHeapPointers(add(p, newlenmem), capmem-newlenmem)
slice.go#L299: memclrNoHeapPointers(add(new.array, oldcapmem), newlenmem-oldcapmem)
stack.go#L612: print(" ", add(scanp, (i+j)*goarch.PtrSize), ":", ptrnames[bv.ptrbit(i+j)], ":", hex(*(*uintptr)(add(scanp, (i+j)*goarch.PtrSize))), " # ", i, " ", *addb(bv.bytedata, i/8), "\n")
stack.go#L619: pp := (*uintptr)(add(scanp, (i+j)*goarch.PtrSize))
stkframe.go#L248: p = add(p, goarch.PtrSize)
string.go#L103: p = add(p, 7)
string.go#L276: memclrNoHeapPointers(add(p, uintptr(size)), cap-uintptr(size))
string.go#L291: memclrNoHeapPointers(add(p, uintptr(size)*4), mem-uintptr(size)*4)
stubs.go#L17: func add(p unsafe.Pointer, x uintptr) unsafe.Pointer {
symtab.go#L788: ffb := (*findfuncbucket)(add(unsafe.Pointer(datap.findfunctab), b*unsafe.Sizeof(findfuncbucket{})))
symtab.go#L1022: return *(*uint32)(add(unsafe.Pointer(&f.nfuncdata), unsafe.Sizeof(f.nfuncdata)+uintptr(table)*4))
traceback.go#L696: bits := *(*uint8)(add(liveInfo, uintptr(liveIdx)+uintptr(slotIdx/8)))
traceback.go#L701: x := readUnaligned64(add(argp, uintptr(off)))
vdso_linux.go#L113: pt := (*elfPhdr)(add(pt, uintptr(i)*unsafe.Sizeof(elfPhdr{})))
vdso_linux.go#L192: aux := (*elfVerdaux)(add(unsafe.Pointer(def), uintptr(def.vd_aux)))
vdso_linux.go#L201: def = (*elfVerdef)(add(unsafe.Pointer(def), uintptr(def.vd_next)))
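Most of the call sites above follow one idiom: address element i of a raw memory block as add(base, i*elemSize), as in chan.go's buffer indexing or map.go's bucket addressing. A minimal, self-contained sketch of that pattern outside the runtime (the local add helper mirrors the runtime one; the uint64 array is an illustrative assumption, not taken from the listing):

package main

import (
	"fmt"
	"unsafe"
)

// add mirrors runtime.add: an unchecked byte-offset on a pointer.
// Illustration only; ordinary code should prefer slice indexing.
func add(p unsafe.Pointer, x uintptr) unsafe.Pointer {
	return unsafe.Pointer(uintptr(p) + x)
}

func main() {
	buf := [4]uint64{10, 20, 30, 40}
	base := unsafe.Pointer(&buf[0])
	elemSize := unsafe.Sizeof(buf[0])

	// The listing's indexing idiom, e.g. chan.go's
	// add(c.buf, uintptr(i)*uintptr(c.elemsize)).
	for i := uintptr(0); i < 4; i++ {
		v := *(*uint64)(add(base, i*elemSize))
		fmt.Println(i, v)
	}
}

Unlike slice indexing, nothing here is bounds-checked; the runtime call sites rely on surrounding invariants (bucket counts, element sizes taken from type metadata) to keep every offset in range.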
This page was generated with Golds v0.6.7 (GOOS=linux GOARCH=amd64), a Go 101 project developed by Tapir Liu.